In [2]:
import os
import cv2
import pickle
import numpy as np
import pandas as pd
import seaborn as ana
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import keras
import tensorflow
import sklearn
from tensorflow.keras.models import Model
from tensorflow.keras.utils import plot_model
from tensorflow.keras.models import Sequential
from tensorflow.keras.applications import VGG19
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.layers import Input,Lambda,Dense,Flatten,Dropout,BatchNormalization,Activation
from sklearn.metrics import confusion_matrix,classification_report,accuracy_score,recall_score,precision_score,f1_score
In [3]:
train_path = r"G:\My Drive\Dataset\Butterfly\train"
test_path = r"G:\My Drive\Dataset\Butterfly\test"
val_path = r"G:\My Drive\Dataset\Butterfly\valid"
In [4]:
# Preview the dataset: print each class name and display the first training
# image found in that class's folder.
for class_name in os.listdir(train_path):
    class_dir = train_path + "/" + class_name
    print(class_name)

    first_file = os.listdir(class_dir)[0]
    sample = mpimg.imread(class_dir + "/" + first_file)
    implot = plt.imshow(sample)
    plt.show()
ADONIS
AFRICAN GIANT SWALLOWTAIL
AMERICAN SNOOT
AN 88
APPOLLO
ATALA
BANDED ORANGE HELICONIAN
BANDED PEACOCK
BECKERS WHITE
BLACK HAIRSTREAK
BLUE MORPHO
BLUE SPOTTED CROW
BROWN SIPROETA
CABBAGE WHITE
CAIRNS BIRDWING
CHECQUERED SKIPPER
CHESTNUT
CLEOPATRA
CLODIUS PARNASSIAN
CLOUDED SULPHUR
COMMON BANDED AWL
COMMON WOOD-NYMPH
COPPER TAIL
CRECENT
CRIMSON PATCH
DANAID EGGFLY
EASTERN COMA
EASTERN DAPPLE WHITE
EASTERN PINE ELFIN
ELBOWED PIERROT
GOLD BANDED
GREAT EGGFLY
GREAT JAY
GREEN CELLED CATTLEHEART
GREY HAIRSTREAK
INDRA SWALLOW
IPHICLUS SISTER
JULIA
LARGE MARBLE
MALACHITE
MANGROVE SKIPPER
MESTRA
METALMARK
MILBERTS TORTOISESHELL
MONARCH
MOURNING CLOAK
ORANGE OAKLEAF
ORANGE TIP
ORCHARD SWALLOW
PAINTED LADY
PAPER KITE
PEACOCK
PINE WHITE
PIPEVINE SWALLOW
POPINJAY
PURPLE HAIRSTREAK
PURPLISH COPPER
QUESTION MARK
RED ADMIRAL
RED CRACKER
RED POSTMAN
RED SPOTTED PURPLE
SCARCE SWALLOW
SILVER SPOT SKIPPER
SLEEPY ORANGE
SOOTYWING
SOUTHERN DOGFACE
STRAITED QUEEN
TROPICAL LEAFWING
TWO BARRED FLASHER
ULYSES
VICEROY
WOOD SATYR
YELLOW SWALLOW TAIL
ZEBRA LONG WING
In [5]:
def imagearray(path, size):
    """Load every image under ``path`` (one sub-folder per class), resized to ``size``.

    Parameters
    ----------
    path : str
        Root directory containing one sub-directory per class.
    size : tuple[int, int]
        Target (width, height) passed to ``cv2.resize``.

    Returns
    -------
    list
        BGR image arrays (``np.ndarray``), in ``os.listdir`` traversal order.
    """
    data = []
    for folder in os.listdir(path):
        sub_path = path + "/" + folder
        for img in os.listdir(sub_path):
            image_path = sub_path + "/" + img
            img_arr = cv2.imread(image_path)
            # cv2.imread returns None for unreadable / non-image files; the
            # original code then crashed inside cv2.resize. Skip such files
            # with a warning instead.
            # NOTE(review): skipping changes the image count, which must stay
            # aligned with labels taken from flow_from_directory — verify the
            # dataset contains no unreadable files.
            if img_arr is None:
                print(f"Skipping unreadable file: {image_path}")
                continue
            data.append(cv2.resize(img_arr, size))
    return data
In [9]:
# Target spatial size for every image (matches the VGG19 input_shape below).
size = (250,250)
# Load all three splits fully into memory as lists of BGR arrays.
train = imagearray(train_path,size)
test = imagearray(test_path,size)
val = imagearray(val_path,size)
In [10]:
# Stack each split's list of equal-shaped images into one 4-D uint8 array.
x_train, x_test, x_val = map(np.array, (train, test, val))
In [11]:
# Rescale pixel values from [0, 255] to [0.0, 1.0] (uint8 -> float64),
# matching the 1/255 rescale used by the generators below.
x_train, x_test, x_val = x_train / 255, x_test / 255, x_val / 255
In [12]:
def data_class(data_path, size, class_mode, batch_size=32, shuffle=True):
    """Build a directory-backed image generator with 1/255 pixel rescaling.

    Parameters
    ----------
    data_path : str
        Directory with one sub-folder per class.
    size : tuple[int, int]
        Target (height, width) for ``flow_from_directory``.
    class_mode : str
        Label mode, e.g. "sparse" (integer ids) or "categorical" (one-hot).
    batch_size : int, optional
        Batch size; default 32 preserves the original behavior.
    shuffle : bool, optional
        Whether batches are shuffled; default True matches Keras' default.

    Returns
    -------
    DirectoryIterator
        Yields (images, labels) batches; its ``.classes`` attribute holds all
        labels in sorted-directory order regardless of ``shuffle``.
    """
    datagen = ImageDataGenerator(rescale=1. / 255)
    return datagen.flow_from_directory(
        data_path,
        target_size=size,
        batch_size=batch_size,
        class_mode=class_mode,
        shuffle=shuffle,
    )
In [13]:
train_class = data_class(train_path,size,"sparse")
test_class = data_class(test_path,size,"sparse")
val_class = data_class(val_path,size,"sparse")
Found 9285 images belonging to 75 classes.
Found 375 images belonging to 75 classes.
Found 375 images belonging to 75 classes.
In [14]:
y_train = train_class.classes
y_test = test_class.classes
y_val = val_class.classes
In [15]:
y_train
Out[15]:
array([ 0,  0,  0, ..., 74, 74, 74])
In [16]:
train_class.class_indices
Out[16]:
{'ADONIS': 0,
 'AFRICAN GIANT SWALLOWTAIL': 1,
 'AMERICAN SNOOT': 2,
 'AN 88': 3,
 'APPOLLO': 4,
 'ATALA': 5,
 'BANDED ORANGE HELICONIAN': 6,
 'BANDED PEACOCK': 7,
 'BECKERS WHITE': 8,
 'BLACK HAIRSTREAK': 9,
 'BLUE MORPHO': 10,
 'BLUE SPOTTED CROW': 11,
 'BROWN SIPROETA': 12,
 'CABBAGE WHITE': 13,
 'CAIRNS BIRDWING': 14,
 'CHECQUERED SKIPPER': 15,
 'CHESTNUT': 16,
 'CLEOPATRA': 17,
 'CLODIUS PARNASSIAN': 18,
 'CLOUDED SULPHUR': 19,
 'COMMON BANDED AWL': 20,
 'COMMON WOOD-NYMPH': 21,
 'COPPER TAIL': 22,
 'CRECENT': 23,
 'CRIMSON PATCH': 24,
 'DANAID EGGFLY': 25,
 'EASTERN COMA': 26,
 'EASTERN DAPPLE WHITE': 27,
 'EASTERN PINE ELFIN': 28,
 'ELBOWED PIERROT': 29,
 'GOLD BANDED': 30,
 'GREAT EGGFLY': 31,
 'GREAT JAY': 32,
 'GREEN CELLED CATTLEHEART': 33,
 'GREY HAIRSTREAK': 34,
 'INDRA SWALLOW': 35,
 'IPHICLUS SISTER': 36,
 'JULIA': 37,
 'LARGE MARBLE': 38,
 'MALACHITE': 39,
 'MANGROVE SKIPPER': 40,
 'MESTRA': 41,
 'METALMARK': 42,
 'MILBERTS TORTOISESHELL': 43,
 'MONARCH': 44,
 'MOURNING CLOAK': 45,
 'ORANGE OAKLEAF': 46,
 'ORANGE TIP': 47,
 'ORCHARD SWALLOW': 48,
 'PAINTED LADY': 49,
 'PAPER KITE': 50,
 'PEACOCK': 51,
 'PINE WHITE': 52,
 'PIPEVINE SWALLOW': 53,
 'POPINJAY': 54,
 'PURPLE HAIRSTREAK': 55,
 'PURPLISH COPPER': 56,
 'QUESTION MARK': 57,
 'RED ADMIRAL': 58,
 'RED CRACKER': 59,
 'RED POSTMAN': 60,
 'RED SPOTTED PURPLE': 61,
 'SCARCE SWALLOW': 62,
 'SILVER SPOT SKIPPER': 63,
 'SLEEPY ORANGE': 64,
 'SOOTYWING': 65,
 'SOUTHERN DOGFACE': 66,
 'STRAITED QUEEN': 67,
 'TROPICAL LEAFWING': 68,
 'TWO BARRED FLASHER': 69,
 'ULYSES': 70,
 'VICEROY': 71,
 'WOOD SATYR': 72,
 'YELLOW SWALLOW TAIL': 73,
 'ZEBRA LONG WING': 74}
In [17]:
print(y_train.shape,
y_test.shape,
y_val.shape)
(9285,) (375,) (375,)
In [18]:
# ImageNet-pretrained VGG19 backbone without its classifier head, for 250x250 RGB input.
vgg = VGG19(input_shape=(250,250,3),weights="imagenet",include_top=False)
In [19]:
# Freeze every pretrained backbone layer so only the new head is trained.
for frozen in vgg.layers:
    frozen.trainable = False
In [20]:
# New classifier head: flatten the VGG19 feature maps and map to 75 butterfly classes.
x = Flatten()(vgg.output)
prediction = Dense(75,activation = "softmax")(x)
In [21]:
# Full transfer-learning model: frozen VGG19 backbone + trainable softmax head.
model = Model(inputs = vgg.input , outputs = prediction)
model.summary()
Model: "model"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
input_1 (InputLayer)         [(None, 250, 250, 3)]     0         
_________________________________________________________________
block1_conv1 (Conv2D)        (None, 250, 250, 64)      1792      
_________________________________________________________________
block1_conv2 (Conv2D)        (None, 250, 250, 64)      36928     
_________________________________________________________________
block1_pool (MaxPooling2D)   (None, 125, 125, 64)      0         
_________________________________________________________________
block2_conv1 (Conv2D)        (None, 125, 125, 128)     73856     
_________________________________________________________________
block2_conv2 (Conv2D)        (None, 125, 125, 128)     147584    
_________________________________________________________________
block2_pool (MaxPooling2D)   (None, 62, 62, 128)       0         
_________________________________________________________________
block3_conv1 (Conv2D)        (None, 62, 62, 256)       295168    
_________________________________________________________________
block3_conv2 (Conv2D)        (None, 62, 62, 256)       590080    
_________________________________________________________________
block3_conv3 (Conv2D)        (None, 62, 62, 256)       590080    
_________________________________________________________________
block3_conv4 (Conv2D)        (None, 62, 62, 256)       590080    
_________________________________________________________________
block3_pool (MaxPooling2D)   (None, 31, 31, 256)       0         
_________________________________________________________________
block4_conv1 (Conv2D)        (None, 31, 31, 512)       1180160   
_________________________________________________________________
block4_conv2 (Conv2D)        (None, 31, 31, 512)       2359808   
_________________________________________________________________
block4_conv3 (Conv2D)        (None, 31, 31, 512)       2359808   
_________________________________________________________________
block4_conv4 (Conv2D)        (None, 31, 31, 512)       2359808   
_________________________________________________________________
block4_pool (MaxPooling2D)   (None, 15, 15, 512)       0         
_________________________________________________________________
block5_conv1 (Conv2D)        (None, 15, 15, 512)       2359808   
_________________________________________________________________
block5_conv2 (Conv2D)        (None, 15, 15, 512)       2359808   
_________________________________________________________________
block5_conv3 (Conv2D)        (None, 15, 15, 512)       2359808   
_________________________________________________________________
block5_conv4 (Conv2D)        (None, 15, 15, 512)       2359808   
_________________________________________________________________
block5_pool (MaxPooling2D)   (None, 7, 7, 512)         0         
_________________________________________________________________
flatten (Flatten)            (None, 25088)             0         
_________________________________________________________________
dense (Dense)                (None, 75)                1881675   
=================================================================
Total params: 21,906,059
Trainable params: 1,881,675
Non-trainable params: 20,024,384
_________________________________________________________________
In [22]:
# Render the model architecture graph with layer output shapes.
plot_model(model=model,show_shapes=True)
Out[22]:
In [23]:
# Stop when val_loss has not improved for 5 epochs, and roll back to the best
# weights seen. Without restore_best_weights the last epoch's weights are kept,
# which here (final val_loss 1.558 vs best 1.237 in the training log) means
# keeping a worse model than the best one observed.
early_stop = EarlyStopping(
    monitor="val_loss",
    mode="min",
    verbose=1,
    patience=5,
    restore_best_weights=True,
)
In [24]:
# Labels are integer class ids ("sparse" mode) -> sparse categorical cross-entropy.
model.compile(loss="sparse_categorical_crossentropy",optimizer="adam",metrics=["accuracy"])
In [25]:
# Train the head on the in-memory arrays; labels come from the generators' .classes.
# NOTE(review): x_train was built by imagearray() (os.listdir traversal order) while
# y_train comes from flow_from_directory (alphabetically sorted classes/filenames) —
# confirm both walks yield identical ordering, otherwise images and labels are
# misaligned. The ~0.59 epoch-1 accuracy suggests they do align here, but verify.
history = model.fit(x_train,y_train,validation_data=(x_val,y_val),epochs=6,callbacks=[early_stop],batch_size=15,shuffle=True)
Epoch 1/6
619/619 [==============================] - 4259s 7s/step - loss: 2.3294 - accuracy: 0.5898 - val_loss: 1.2544 - val_accuracy: 0.7600
Epoch 2/6
619/619 [==============================] - 4220s 7s/step - loss: 0.4789 - accuracy: 0.8891 - val_loss: 1.3895 - val_accuracy: 0.7440
Epoch 3/6
619/619 [==============================] - 4214s 7s/step - loss: 0.3567 - accuracy: 0.9212 - val_loss: 1.2813 - val_accuracy: 0.7893
Epoch 4/6
619/619 [==============================] - 8921s 14s/step - loss: 0.4377 - accuracy: 0.9185 - val_loss: 1.3195 - val_accuracy: 0.8293
Epoch 5/6
619/619 [==============================] - 4103s 7s/step - loss: 0.2714 - accuracy: 0.9507 - val_loss: 1.2369 - val_accuracy: 0.8267
Epoch 6/6
619/619 [==============================] - 4161s 7s/step - loss: 0.3686 - accuracy: 0.9361 - val_loss: 1.5580 - val_accuracy: 0.8160
In [26]:
# Training vs. validation accuracy per epoch. Axis labels added so the
# figure is self-explanatory when skimmed.
plt.figure(figsize=(10, 8))
plt.plot(history.history['accuracy'], label='train acc')
plt.plot(history.history['val_accuracy'], label='val acc')
plt.xlabel('epoch')
plt.ylabel('accuracy')
plt.legend()
plt.title('Accuracy')
plt.show()
In [27]:
# Training vs. validation loss per epoch.
plt.figure(figsize=(10, 8))
plt.plot(history.history['loss'], label='train loss')
# BUG FIX: the original plotted history.history['val_accuracy'] here while
# labelling it 'val loss'; plot the actual validation loss curve.
plt.plot(history.history['val_loss'], label='val loss')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.title('Loss')
plt.show()
In [28]:
# Final held-out evaluation; returns [loss, accuracy] for the test split.
model.evaluate(x_test,y_test,batch_size=15)
25/25 [==============================] - 145s 6s/step - loss: 1.5684 - accuracy: 0.8107
Out[28]:
[1.5683584213256836, 0.8106666803359985]